use flate2::read::GzDecoder;
use flate2::{GzBuilder, Compression};
use git2;
-use tar::{Archive, Builder, Header};
+use tar::{Archive, Builder, Header, EntryType};
-use core::{SourceId, Package, PackageId, Workspace, Source};
+use core::{Package, Workspace, Source, SourceId};
use sources::PathSource;
use util::{self, CargoResult, human, internal, ChainError, Config, FileLock};
use ops::{self, DefaultExecutor};
human(format!("non-utf8 path in source directory: {}",
relative.display()))
})?;
- let mut file = File::open(file).chain_error(|| {
- human(format!("failed to open for archiving: `{}`", file.display()))
- })?;
config.shell().verbose(|shell| {
shell.status("Archiving", &relative)
})?;
// unpack the selectors 0.4.0 crate on crates.io. Either that or take a
// look at rust-lang/cargo#2326
let mut header = Header::new_ustar();
- let metadata = file.metadata().chain_error(|| {
- human(format!("could not learn metadata for: `{}`", relative))
- })?;
header.set_path(&path).chain_error(|| {
human(format!("failed to add to archive: `{}`", relative))
})?;
+ let mut file = File::open(file).chain_error(|| {
+ human(format!("failed to open for archiving: `{}`", file.display()))
+ })?;
+ let metadata = file.metadata().chain_error(|| {
+ human(format!("could not learn metadata for: `{}`", relative))
+ })?;
header.set_metadata(&metadata);
- header.set_cksum();
- ar.append(&header, &mut file).chain_error(|| {
- internal(format!("could not archive source file `{}`", relative))
- })?;
+ if relative == "Cargo.toml" {
+ let orig = Path::new(&path).with_file_name("Cargo.toml.orig");
+ header.set_path(&orig)?;
+ header.set_cksum();
+ ar.append(&header, &mut file).chain_error(|| {
+ internal(format!("could not archive source file `{}`", relative))
+ })?;
+
+ let mut header = Header::new_ustar();
+ let toml = pkg.to_registry_toml();
+ header.set_path(&path)?;
+ header.set_entry_type(EntryType::file());
+ header.set_mode(0o644);
+ header.set_size(toml.len() as u64);
+ header.set_cksum();
+ ar.append(&header, toml.as_bytes()).chain_error(|| {
+ internal(format!("could not archive source file `{}`", relative))
+ })?;
+ } else {
+ header.set_cksum();
+ ar.append(&header, &mut file).chain_error(|| {
+ internal(format!("could not archive source file `{}`", relative))
+ })?;
+ }
}
let encoder = ar.into_inner()?;
encoder.finish()?;
}
let mut archive = Archive::new(f);
archive.unpack(dst.parent().unwrap())?;
- let manifest_path = dst.join("Cargo.toml");
- // When packages are uploaded to a registry, all path dependencies are
- // implicitly converted to registry dependencies, so we rewrite those
- // dependencies here.
- //
- // We also make sure to point all paths at `dst` instead of the previous
- // location that the package was originally read from. In locking the
- // `SourceId` we're telling it that the corresponding `PathSource` will be
- // considered updated and we won't actually read any packages.
- let cratesio = SourceId::crates_io(config)?;
- let precise = Some("locked".to_string());
- let new_src = SourceId::for_path(&dst)?.with_precise(precise);
- let new_pkgid = PackageId::new(pkg.name(), pkg.version(), &new_src)?;
- let new_summary = pkg.summary().clone().map_dependencies(|d| {
- if !d.source_id().is_path() { return d }
- d.clone_inner().set_source_id(cratesio.clone()).into_dependency()
- });
- let mut new_manifest = pkg.manifest().clone();
- new_manifest.set_summary(new_summary.override_id(new_pkgid));
- let new_pkg = Package::new(new_manifest, &manifest_path);
-
- // Now that we've rewritten all our path dependencies, compile it!
+ // Manufacture an ephemeral workspace to ensure that even if the top-level
+ // package has a workspace we can still build our new crate.
+ let id = SourceId::for_path(&dst)?;
+ let mut src = PathSource::new(&dst, &id, ws.config());
+ let new_pkg = src.root_package()?;
let ws = Workspace::ephemeral(new_pkg, config, None, true)?;
+
ops::compile_ws(&ws, None, &ops::CompileOptions {
config: config,
jobs: opts.jobs,
use std::collections::{HashMap, HashSet, BTreeSet};
-use std::default::Default;
use std::fmt;
use std::fs;
use std::path::{Path, PathBuf};
+use std::rc::Rc;
use std::str;
use toml;
use semver::{self, VersionReq};
+use serde::ser;
use serde::de::{self, Deserialize};
use serde_ignored;
}
})?;
- return match manifest.to_real_manifest(source_id, &layout, config) {
+ let manifest = Rc::new(manifest);
+ return match TomlManifest::to_real_manifest(&manifest,
+ source_id,
+ &layout,
+ config) {
Ok((mut manifest, paths)) => {
for key in unused {
manifest.add_warning(format!("unused manifest key: {}", key));
Ok((EitherManifest::Real(manifest), paths))
}
Err(e) => {
- match manifest.to_virtual_manifest(source_id, &layout, config) {
+ match TomlManifest::to_virtual_manifest(&manifest,
+ source_id,
+ &layout,
+ config) {
Ok((m, paths)) => Ok((EitherManifest::Virtual(m), paths)),
Err(..) => Err(e),
}
type TomlTestTarget = TomlTarget;
type TomlBenchTarget = TomlTarget;
+// Serialize is needed so the normalized manifest emitted into the .crate
+// archive (see `prepare_for_publish`) can be re-encoded as TOML.
+// `untagged` mirrors the two surface syntaxes: `dep = "1.0"` vs a table.
+#[derive(Serialize)]
+#[serde(untagged)]
pub enum TomlDependency {
    Simple(String),
    Detailed(DetailedTomlDependency)
}
}
-#[derive(Deserialize, Clone, Default)]
+#[derive(Deserialize, Serialize, Clone, Default)]
pub struct DetailedTomlDependency {
version: Option<String>,
path: Option<String>,
default_features2: Option<bool>,
}
-#[derive(Deserialize)]
+#[derive(Deserialize, Serialize)]
pub struct TomlManifest {
package: Option<Box<TomlProject>>,
project: Option<Box<TomlProject>>,
badges: Option<HashMap<String, HashMap<String, String>>>,
}
-#[derive(Deserialize, Clone, Default)]
+#[derive(Deserialize, Serialize, Clone, Default)]
pub struct TomlProfiles {
test: Option<TomlProfile>,
doc: Option<TomlProfile>,
}
}
-#[derive(Clone)]
+// Emit `opt-level` as a bare integer when it parses as one (0..=3), and as
+// a string otherwise (e.g. "s"/"z"), so the regenerated TOML matches what a
+// user would have written by hand.
+impl ser::Serialize for TomlOptLevel {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+        where S: ser::Serializer,
+    {
+        match self.0.parse::<u32>() {
+            Ok(n) => n.serialize(serializer),
+            Err(_) => self.0.serialize(serializer),
+        }
+    }
+}
+
+// Untagged: serializes as either a bare integer or a bare boolean,
+// matching the two accepted manifest spellings of this value.
+#[derive(Clone, Serialize)]
+#[serde(untagged)]
pub enum U32OrBool {
    U32(u32),
    Bool(bool),
}
}
-#[derive(Deserialize, Clone, Default)]
+#[derive(Deserialize, Serialize, Clone, Default)]
pub struct TomlProfile {
#[serde(rename = "opt-level")]
opt_level: Option<TomlOptLevel>,
overflow_checks: Option<bool>,
}
-#[derive(Clone, Debug)]
+// Untagged: serializes as a plain TOML string or boolean, mirroring how the
+// field appears in a hand-written manifest.
+#[derive(Clone, Debug, Serialize)]
+#[serde(untagged)]
pub enum StringOrBool {
    String(String),
    Bool(bool),
}
}
-#[derive(Deserialize)]
+#[derive(Deserialize, Serialize, Clone)]
pub struct TomlProject {
name: String,
version: TomlVersion,
repository: Option<String>,
}
-#[derive(Deserialize)]
+#[derive(Deserialize, Serialize)]
pub struct TomlWorkspace {
members: Option<Vec<String>>,
exclude: Option<Vec<String>>,
}
+// Clone is required so `prepare_for_publish` can deep-copy the
+// `[package]`/`[project]` table (TomlProject now derives Clone and
+// contains this type).
+#[derive(Clone)]
pub struct TomlVersion {
    version: semver::Version,
}
}
}
+// Serialize a version back to its canonical string form (e.g. "0.1.0")
+// rather than as a struct, so the emitted TOML reads `version = "0.1.0"`.
+impl ser::Serialize for TomlVersion {
+    fn serialize<S>(&self, s: S) -> Result<S::Ok, S::Error>
+        where S: ser::Serializer,
+    {
+        self.version.to_string().serialize(s)
+    }
+}
+
+
impl TomlProject {
pub fn to_package_id(&self, source_id: &SourceId) -> CargoResult<PackageId> {
PackageId::new(&self.name, self.version.version.clone(),
}
impl TomlManifest {
- fn to_real_manifest(&self,
+ /// Produce a normalized copy of this manifest suitable for inclusion in a
+ /// published `.crate` archive:
+ ///
+ /// * the `[project]`/`[package]` table is canonicalized to `package` and
+ ///   its `workspace` key is dropped (the packaged crate must stand alone),
+ /// * alias fields (`dev_dependencies2`, `build_dependencies2`) are folded
+ ///   into their canonical spellings,
+ /// * `path` keys are stripped from all dependency tables (including
+ ///   per-`target` tables) so they resolve through a registry instead, and
+ /// * `[replace]` and `[workspace]` sections are dropped entirely.
+ pub fn prepare_for_publish(&self) -> TomlManifest {
+ // NOTE(review): unwrap assumes a `[package]`/`[project]` section exists;
+ // presumably callers only invoke this on real (non-virtual) manifests —
+ // TODO confirm at the call site.
+ let mut package = self.package.as_ref()
+ .or(self.project.as_ref())
+ .unwrap()
+ .clone();
+ package.workspace = None;
+ return TomlManifest {
+ package: Some(package),
+ project: None,
+ profile: self.profile.clone(),
+ lib: self.lib.clone(),
+ bin: self.bin.clone(),
+ example: self.example.clone(),
+ test: self.test.clone(),
+ bench: self.bench.clone(),
+ dependencies: map_deps(self.dependencies.as_ref()),
+ // Prefer the canonical field, falling back to the underscore alias;
+ // the alias slot is cleared so only one spelling survives.
+ dev_dependencies: map_deps(self.dev_dependencies.as_ref()
+ .or(self.dev_dependencies2.as_ref())),
+ dev_dependencies2: None,
+ build_dependencies: map_deps(self.build_dependencies.as_ref()
+ .or(self.build_dependencies2.as_ref())),
+ build_dependencies2: None,
+ features: self.features.clone(),
+ // Per-target dependency tables get the same normalization.
+ target: self.target.as_ref().map(|target_map| {
+ target_map.iter().map(|(k, v)| {
+ (k.clone(), TomlPlatform {
+ dependencies: map_deps(v.dependencies.as_ref()),
+ dev_dependencies: map_deps(v.dev_dependencies.as_ref()
+ .or(v.dev_dependencies2.as_ref())),
+ dev_dependencies2: None,
+ build_dependencies: map_deps(v.build_dependencies.as_ref()
+ .or(v.build_dependencies2.as_ref())),
+ build_dependencies2: None,
+ })
+ }).collect()
+ }),
+ replace: None,
+ workspace: None,
+ badges: self.badges.clone(),
+ };
+
+ // Apply `map_dependency` across an optional dependency table.
+ fn map_deps(deps: Option<&HashMap<String, TomlDependency>>)
+ -> Option<HashMap<String, TomlDependency>>
+ {
+ let deps = match deps {
+ Some(deps) => deps,
+ None => return None
+ };
+ Some(deps.iter().map(|(k, v)| (k.clone(), map_dependency(v))).collect())
+ }
+
+ // Strip the `path` key from a detailed dependency; simple version-string
+ // dependencies pass through unchanged.
+ fn map_dependency(dep: &TomlDependency) -> TomlDependency {
+ match *dep {
+ TomlDependency::Detailed(ref d) => {
+ let mut d = d.clone();
+ d.path.take(); // path dependencies become crates.io deps
+ TomlDependency::Detailed(d)
+ }
+ TomlDependency::Simple(ref s) => {
+ TomlDependency::Simple(s.clone())
+ }
+ }
+ }
+ }
+
+ fn to_real_manifest(me: &Rc<TomlManifest>,
source_id: &SourceId,
layout: &Layout,
config: &Config)
let mut nested_paths = vec![];
let mut warnings = vec![];
- let project = self.project.as_ref().or_else(|| self.package.as_ref());
+ let project = me.project.as_ref().or_else(|| me.package.as_ref());
let project = project.chain_error(|| {
human("no `package` or `project` section found.")
})?;
// If we have a lib with a path, we're done
// If we have a lib with no path, use the inferred lib or_else package name
- let lib = match self.lib {
+ let lib = match me.lib {
Some(ref lib) => {
lib.validate_library_name()?;
lib.validate_crate_type()?;
None => inferred_lib_target(&project.name, layout),
};
- let bins = match self.bin {
+ let bins = match me.bin {
Some(ref bins) => {
for target in bins {
target.validate_binary_name()?;
}
}
- let examples = match self.example {
+ let examples = match me.example {
Some(ref examples) => {
for target in examples {
target.validate_example_name()?;
None => inferred_example_targets(layout)
};
- let tests = match self.test {
+ let tests = match me.test {
Some(ref tests) => {
for target in tests {
target.validate_test_name()?;
None => inferred_test_targets(layout)
};
- let benches = match self.bench {
+ let benches = match me.bench {
Some(ref benches) => {
for target in benches {
target.validate_bench_name()?;
}
// processing the custom build script
- let new_build = self.maybe_custom_build(&project.build, &layout.root);
+ let new_build = me.maybe_custom_build(&project.build, &layout.root);
// Get targets
let targets = normalize(&layout.root,
}
// Collect the deps
- process_dependencies(&mut cx, self.dependencies.as_ref(),
+ process_dependencies(&mut cx, me.dependencies.as_ref(),
None)?;
- let dev_deps = self.dev_dependencies.as_ref()
- .or(self.dev_dependencies2.as_ref());
+ let dev_deps = me.dev_dependencies.as_ref()
+ .or(me.dev_dependencies2.as_ref());
process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?;
- let build_deps = self.build_dependencies.as_ref()
- .or(self.build_dependencies2.as_ref());
+ let build_deps = me.build_dependencies.as_ref()
+ .or(me.build_dependencies2.as_ref());
process_dependencies(&mut cx, build_deps, Some(Kind::Build))?;
- for (name, platform) in self.target.iter().flat_map(|t| t) {
+ for (name, platform) in me.target.iter().flat_map(|t| t) {
cx.platform = Some(name.parse()?);
process_dependencies(&mut cx, platform.dependencies.as_ref(),
None)?;
process_dependencies(&mut cx, dev_deps, Some(Kind::Development))?;
}
- replace = self.replace(&mut cx)?;
+ replace = me.replace(&mut cx)?;
}
{
let exclude = project.exclude.clone().unwrap_or(Vec::new());
let include = project.include.clone().unwrap_or(Vec::new());
- let summary = Summary::new(pkgid, deps, self.features.clone()
+ let summary = Summary::new(pkgid, deps, me.features.clone()
.unwrap_or_else(HashMap::new))?;
let metadata = ManifestMetadata {
description: project.description.clone(),
repository: project.repository.clone(),
keywords: project.keywords.clone().unwrap_or(Vec::new()),
categories: project.categories.clone().unwrap_or(Vec::new()),
- badges: self.badges.clone().unwrap_or_else(HashMap::new),
+ badges: me.badges.clone().unwrap_or_else(HashMap::new),
};
- let workspace_config = match (self.workspace.as_ref(),
+ let workspace_config = match (me.workspace.as_ref(),
project.workspace.as_ref()) {
(Some(config), None) => {
WorkspaceConfig::Root {
`[workspace]`, only one can be specified")
}
};
- let profiles = build_profiles(&self.profile);
+ let profiles = build_profiles(&me.profile);
let publish = project.publish.unwrap_or(true);
let mut manifest = Manifest::new(summary,
targets,
profiles,
publish,
replace,
- workspace_config);
+ workspace_config,
+ me.clone());
if project.license_file.is_some() && project.license.is_some() {
manifest.add_warning("only one of `license` or \
`license-file` is necessary".to_string());
Ok((manifest, nested_paths))
}
- fn to_virtual_manifest(&self,
+ fn to_virtual_manifest(me: &Rc<TomlManifest>,
source_id: &SourceId,
layout: &Layout,
config: &Config)
-> CargoResult<(VirtualManifest, Vec<PathBuf>)> {
- if self.project.is_some() {
+ if me.project.is_some() {
bail!("virtual manifests do not define [project]");
}
- if self.package.is_some() {
+ if me.package.is_some() {
bail!("virtual manifests do not define [package]");
}
- if self.lib.is_some() {
+ if me.lib.is_some() {
bail!("virtual manifests do not specifiy [lib]");
}
- if self.bin.is_some() {
+ if me.bin.is_some() {
bail!("virtual manifests do not specifiy [[bin]]");
}
- if self.example.is_some() {
+ if me.example.is_some() {
bail!("virtual manifests do not specifiy [[example]]");
}
- if self.test.is_some() {
+ if me.test.is_some() {
bail!("virtual manifests do not specifiy [[test]]");
}
- if self.bench.is_some() {
+ if me.bench.is_some() {
bail!("virtual manifests do not specifiy [[bench]]");
}
let mut nested_paths = Vec::new();
let mut warnings = Vec::new();
let mut deps = Vec::new();
- let replace = self.replace(&mut Context {
+ let replace = me.replace(&mut Context {
pkgid: None,
deps: &mut deps,
source_id: source_id,
platform: None,
layout: layout,
})?;
- let profiles = build_profiles(&self.profile);
- let workspace_config = match self.workspace {
+ let profiles = build_profiles(&me.profile);
+ let workspace_config = match me.workspace {
Some(ref config) => {
WorkspaceConfig::Root {
members: config.members.clone(),
}
}
-#[derive(Default, Deserialize, Debug, Clone)]
+#[derive(Default, Serialize, Deserialize, Debug, Clone)]
struct TomlTarget {
name: Option<String>,
}
}
+// Serialize transparently as the wrapped inner value (newtype passthrough).
+impl ser::Serialize for PathValue {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+        where S: ser::Serializer,
+    {
+        self.0.serialize(serializer)
+    }
+}
+
/// Corresponds to a `target` entry, but `TomlTarget` is already used.
-#[derive(Deserialize)]
+#[derive(Serialize, Deserialize)]
struct TomlPlatform {
dependencies: Option<HashMap<String, TomlDependency>>,
#[serde(rename = "build-dependencies")]
use cargotest::{cargo_process, process};
use cargotest::support::{project, execs, paths, git, path2url, cargo_exe};
use flate2::read::GzDecoder;
-use hamcrest::{assert_that, existing_file, contains};
+use hamcrest::{assert_that, existing_file, contains, equal_to};
use tar::Archive;
#[test]
let fname = f.header().path_bytes();
let fname = &*fname;
assert!(fname == b"foo-0.0.1/Cargo.toml" ||
+ fname == b"foo-0.0.1/Cargo.toml.orig" ||
fname == b"foo-0.0.1/src/main.rs",
"unexpected filename: {:?}", f.header().path())
}
let fname = f.header().path_bytes();
let fname = &*fname;
assert!(fname == b"nested-0.0.1/Cargo.toml" ||
+ fname == b"nested-0.0.1/Cargo.toml.orig" ||
fname == b"nested-0.0.1/src/main.rs",
"unexpected filename: {:?}", f.header().path())
}
to proceed despite this, pass the `--allow-dirty` flag
"));
}
+
+#[test]
+fn generated_manifest() {
+ // A crate with a `[workspace]` table and a path dependency that also
+ // carries an explicit `version`: the normalized manifest inside the
+ // .crate should drop both the `path` key and the workspace section.
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+ exclude = ["*.txt"]
+ license = "MIT"
+ description = "foo"
+
+ [workspace]
+
+ [dependencies]
+ bar = { path = "bar", version = "0.1" }
+ "#)
+ .file("src/main.rs", "")
+ .file("bar/Cargo.toml", r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ "#)
+ .file("bar/src/lib.rs", "");
+
+ assert_that(p.cargo_process("package").arg("--no-verify"),
+ execs().with_status(0));
+
+ // Unpack the produced .crate (gzip-compressed tar) and pull out the
+ // rewritten `Cargo.toml`; the user's original is archived separately
+ // as `Cargo.toml.orig`, so this entry is the generated one.
+ let f = File::open(&p.root().join("target/package/foo-0.0.1.crate")).unwrap();
+ let mut rdr = GzDecoder::new(f).unwrap();
+ let mut contents = Vec::new();
+ rdr.read_to_end(&mut contents).unwrap();
+ let mut ar = Archive::new(&contents[..]);
+ let mut entry = ar.entries().unwrap()
+ .map(|f| f.unwrap())
+ .find(|e| e.path().unwrap().ends_with("Cargo.toml"))
+ .unwrap();
+ let mut contents = String::new();
+ entry.read_to_string(&mut contents).unwrap();
+ // NOTE(review): this expected text must stay byte-for-byte in sync with
+ // the header cargo writes into generated manifests.
+ assert_that(&contents[..], equal_to(
+r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+name = "foo"
+version = "0.0.1"
+authors = []
+exclude = ["*.txt"]
+description = "foo"
+license = "MIT"
+[dependencies.bar]
+version = "0.1"
+"#));
+}
+
+#[test]
+fn ignore_workspace_specifier() {
+ // The member crate `bar` names its workspace via `workspace = ".."` in
+ // `[package]`; packaging `bar` should strip that key so the generated
+ // manifest has no workspace reference at all.
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.0.1"
+ authors = []
+
+ [workspace]
+
+ [dependencies]
+ bar = { path = "bar", version = "0.1" }
+ "#)
+ .file("src/main.rs", "")
+ .file("bar/Cargo.toml", r#"
+ [package]
+ name = "bar"
+ version = "0.1.0"
+ authors = []
+ workspace = ".."
+ "#)
+ .file("bar/src/lib.rs", "");
+ p.build();
+
+ // Package the member crate from inside its own directory.
+ assert_that(p.cargo("package").arg("--no-verify").cwd(p.root().join("bar")),
+ execs().with_status(0));
+
+ // Unpack bar's .crate and read back the generated `Cargo.toml`
+ // (the untouched original lives alongside it as `Cargo.toml.orig`).
+ let f = File::open(&p.root().join("target/package/bar-0.1.0.crate")).unwrap();
+ let mut rdr = GzDecoder::new(f).unwrap();
+ let mut contents = Vec::new();
+ rdr.read_to_end(&mut contents).unwrap();
+ let mut ar = Archive::new(&contents[..]);
+ let mut entry = ar.entries().unwrap()
+ .map(|f| f.unwrap())
+ .find(|e| e.path().unwrap().ends_with("Cargo.toml"))
+ .unwrap();
+ let mut contents = String::new();
+ entry.read_to_string(&mut contents).unwrap();
+ // Expected output: canonical `[package]` table, no `workspace` key.
+ assert_that(&contents[..], equal_to(
+r#"# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g. crates.io) dependencies
+#
+# If you believe there's an error in this file please file an
+# issue against the rust-lang/cargo repository. If you're
+# editing this file be aware that the upstream Cargo.toml
+# will likely look very different (and much more reasonable)
+
+[package]
+name = "bar"
+version = "0.1.0"
+authors = []
+"#));
+}